This notebook verifies our results on the classification of the branching sets of the exceptional-symmetry fillings of the asymmetric L-space knots in the SnapPy census.

We start with a list containing the symmetric fillings together with their claimed branching sets.

In [1]:
branching_sets_of_symmetric_fillings=[['t12533',[
[(0,1), ['K12n407']], 
[(1,1), ['L13n7360(0,0)(2,1)']],
[(-1,1), ['L12n789']],
[(-2,1), ['L12n722(3,1)(0,0)']],
[(-1,2), ['L11n192(-3,1)(0,0)']],
[(-3,1), ['L11n350(0,0)(-4,1)(0,0)']]]],
['t12681',[
[(0,1), ['K11n89']], 
[(1,1), ['L11n419(0,0)(5,1)(0,0)', 'L12n1907(0,0)(3,1)(0,0)', 'L13n7356(0,0)(0,0)(2,1)']],
[(-1,1), ['L11n172']],
[(-1,2), ['L10a81(-3,1)(0,0)']],
[(-1,3), ['L11a282(-4, 1)(0,0)']],
[(-2,3), ['L13n4363(-5,3)(0,0)']]]],
['o9_38928',[
[(0,1), ['L11n178']],
[(1,1), ['K11n172']], 
[(-1,1), ['L12n703(-3,1)(0,0)']],
[(-2,1), ['L12n1949(0,0)(2, 1)(0,0)']],
[(2,1), ['L13n7433(0,0)(-3, 1)(0,0)', 'L10a105(0,0)(-2,1)', 'L10n54(-8,1)(0,0)']]]],
['o9_39162',[
[(0,1), ['K12n278']], 
[(1,1), ['L12n1050']],
[(-1,1), ['L13n9864(-3,1)(0,0)(0,0)']],
[(2,1), ['L10n44(7,1)(0,0)']],
[(1,2), ['L12n784(3,1)(0,0)']],
[(1,4), ['L13n4343(-7,4)(0,0)']]]],
['o9_40363',[
[(0,1), ['K12n479']], 
[(1,1), ['L14n43377(0,0)(2, 1)', 'L13n9451(0,0)(3,1)(0,0)', 'L11n419(0,0)(7,1)(0,0)']],
[(-1,1), ['L12n785']],
[(-1,2), ['L11a225(-3,1)(0,0)']],
[(-1,3), ['L12a1861(0,0)(4,1)(0,0)']],
[(-3,4), ['L13n4363(-7,4)(0,0)']]]],
['o9_40487',[
[(0,1), ['L11n152']],
[(1,1), ['K11n147']], 
[(-1,1), ['L12n722(-3,1)(0,0)']],
[(1,2), ['L12n968(-3,1)(0,0)']],
[(2,1), ['L11n333(0,0)(2, 1)(0,0)']],
[(3,1), ['L12n1314(7,1)(0,0)']],
[(-2,1), ['L12n1041(-2,1)(0,0)']]]],
['o9_40504',[
[(0,1), ['L11n179']],
[(1,1), ['K11n166']], 
[(-1,1), ['L12n715(-3,1)(0,0)']],
[(-2,1), ['L12n1037(-2, 1)(0,0)']],
[(2,1), ['L13n7421(0,0)(-3, 1)(0,0)', 'L10a146(0,0)(-2,1)(0,0)', 'L13n8521(0,0)(5, 1)(0,0)']]]],
['o9_40582',[
[(0,1), ['L13n4413']],
[(1,1), ['K13n2958']],
[(-1,1), ['L10n44(-5,3)(0,0)']],
[(1,2), ['L12n996(3,1)(0,0)']],
[(2,1), ['L13n9833(3,1)(0,0)(0,0)']],
[(4,1), ['L11n350(0,0)(-5,1)(0,0)']]]],
['o9_42675',[
[(0,1), ['L12n702']],
[(1,1), ['L14n24428(5, 3)(0,0)']],
[(-1,1), ['K12n730']], 
[(2,1), ['L13n7552(0,0)(-2,1)(0,0)']],
[(-2,1), ['L11n223(-2,1)(0,0)', 'L10n24(0,0)(6,1)', 'L13n9832(0,0)(8,3)(0,0)']]]]]
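Each entry pairs a census knot with a list of filling slopes and, for each slope, one or more surgery descriptions of the claimed branching set: cusps carrying a non-trivial filling such as (3,1) are surgered away, while the (0,0) cusps form the branching locus. As a minimal sketch of how an entry is read (assuming snappy has already been imported, as in the next cell; the variable names are only illustrative), the (-2,1)-filling of t12533 and its claimed branching set are obtained by

knot_name, fillings = branching_sets_of_symmetric_fillings[0]   # 't12533' and its list of fillings
slope, branching_sets = fillings[3]                             # (-2,1) and ['L12n722(3,1)(0,0)']
K = snappy.Manifold(knot_name)
K.dehn_fill(slope)                                              # the symmetric filling t12533(-2,1)
B = snappy.Manifold(branching_sets[0])                          # surgery description of the branching set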
In [2]:
import snappy

def double_branched_cover(link):
    """
    Returns the double branched covers of the link. This also works for links in more
    general manifolds. Note that a knot in a general manifold may have more than one
    double branched cover (or no double branched cover at all if the knot represents a
    primitive element in homology). This function returns the complete list of all
    double branched covers of the link.
    """
    L=link.copy()
    for i in range(L.num_cusps()):
        if L.cusp_info(i).filling==(0.0, 0.0):
            L.dehn_fill((2,0),i)
    return [cov for cov in L.covers(2) if (2.0, 0.0) not in cov.cusp_info('filling')]
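
# Illustrative sanity check (not part of the verification below): for a knot in S^3
# this reproduces the classical double branched cover, e.g. for the trefoil 3_1 the
# cover is the lens space L(3,1), detected here via its first homology Z/3.
def _example_double_branched_cover():
    covers = double_branched_cover(snappy.Manifold('3_1'))
    return [cov.homology() for cov in covers]   # expected: [Z/3]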

def better_is_isometric_to(X,Y,index):
    """
    Returns True if X and Y are isometric.
    Returns False if X and Y have different homologies. TO DO: Use volume to rigorously distinguish X and Y.
    Returns 'unclear' if SnapPy cannot verify it.
    The larger the index, the more retriangulation attempts are made.
    """     
    w='unclear'
    if X.homology()!=Y.homology():
        w=False
    if w=='unclear':
        for i in range(index):
            try:
                w=X.is_isometric_to(Y)
            except RuntimeError:
                pass
            except snappy.SnapPeaFatalError:
                pass
            if w==True:
                break
            if w==False:
                w='unclear'
            X.randomize()
            Y.randomize()
    return w
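
# Illustrative usage (not part of the verification below): two triangulations of the
# same hyperbolic manifold should be recognised as isometric; a return value of
# 'unclear' only means that SnapPy could not decide within the given number of
# retriangulation attempts.
def _example_better_is_isometric_to():
    X = snappy.Manifold('m004')
    Y = snappy.Manifold('m004')
    Y.randomize()
    return better_is_isometric_to(X, Y, index=10)   # expected: True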


### The following two functions are written by Dunfield and search for positive triangulations.

def all_positive(manifold):
    return manifold.solution_type() == 'all tetrahedra positively oriented'

def find_positive_triangulation(manifold, tries=100):
    M = manifold.copy()
    for i in range(tries):
        if all_positive(M):
            return M
        M.randomize()
    for d in M.dual_curves():
        X = M.drill(d)
        X = X.filled_triangulation()
        X.dehn_fill((1,0))
        for i in range(tries):
            if all_positive(X):
                return X
            X.randomize()

    # In the closed case, here is another trick.
    if all(not c for c in M.cusp_info('is_complete')):
        for i in range(tries):
            # Drills out a random edge
            X = M.__class__(M.filled_triangulation())
            if all_positive(X):
                return X
            M.randomize()
            
def better_find_positive_triangulation(M,tries=1):
    '''
    Searches for a positive triangulation, ignoring SnapPea fatal errors (randomizing and retrying up to `tries` times).
    '''
    RandomizeCount=0
    while RandomizeCount<tries:
        try:
            X=find_positive_triangulation(M)
            return X
        except snappy.SnapPeaFatalError:
            M.randomize()
            RandomizeCount=RandomizeCount+1
    return None
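
# Illustrative usage (not part of the verification below): the figure-eight knot
# complement already comes with a geometric triangulation, so a positive
# triangulation should be found immediately; None would indicate failure.
def _example_better_find_positive_triangulation():
    X = better_find_positive_triangulation(snappy.Manifold('m004'))
    return X is not None and all_positive(X)   # expected: True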

def fill_triangulation(M):
    '''
    Fills all cusps except one by repeatedly filling cusp 0 until a single cusp remains.
    '''
    if M.num_cusps()==1:
        return M
    M=M.filled_triangulation([0])
    M=fill_triangulation(M)
    return M
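
# Illustrative usage (not part of the verification below): for a surgery description
# taken from the list above, fill_triangulation repeatedly fills cusp 0 until a
# 1-cusped triangulation remains.
def _example_fill_triangulation():
    M = snappy.Manifold('L10a81(-3,1)(0,0)')     # 2-cusped link exterior, one cusp filled
    return fill_triangulation(M).num_cusps()     # expected: 1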
In [3]:
#### This is Dunfield's util.py from his exceptional census

####  For a SnapPy manifold M described as a single filling of a cusp (apply filled_triangulation() first if needed),
####  the command regina_name(M) returns the name under which Regina identifies M.

"""

This file provides functions for working with Regina (with a little
help from SnapPy) to:

1. Give a standard name ("identify") manifolds, especially Seifert and
   graph manifolds.

2. Find essential tori.

3. Try to compute the JSJ decomposition.

"""

import regina
import snappy
import re
import networkx as nx

def appears_hyperbolic(M):
    acceptable = ['all tetrahedra positively oriented',
                  'contains negatively oriented tetrahedra']
    return M.solution_type() in acceptable and M.volume() > 0

def children(packet):
    child = packet.firstChild()
    while child:
        yield child
        child = child.nextSibling()

def to_regina(data):
    if hasattr(data, '_to_string'):
        data = data._to_string()
    if isinstance(data, str):
        if data.find('(') > -1:
            data = closed_isosigs(data)[0]
        return regina.Triangulation3(data)
    assert isinstance(data, regina.Triangulation3)
    return data

def extract_vector(surface):
    """
    Extract the raw vector of the (almost) normal surface in Regina's
    NS_STANDARD coordinate system.
    """
    S = surface
    T = S.triangulation()
    n = T.countTetrahedra()
    ans = []
    for i in range(n):
        for j in range(4):
            ans.append(S.triangles(i, j))
        for j in range(3):
            ans.append(S.quads(i, j))
    A = regina.NormalSurface(T, regina.NS_STANDARD, ans)
    assert A.sameSurface(S)
    return ans

def haken_sum(S1, S2):
    T = S1.triangulation()
    assert S1.locallyCompatible(S2)
    v1, v2 = extract_vector(S1), extract_vector(S2)
    sum_vec = [x1 + x2 for x1, x2 in zip(v1, v2)]
    A = regina.NormalSurface(T, regina.NS_STANDARD, sum_vec)
    assert S1.locallyCompatible(A) and S2.locallyCompatible(A)
    assert S1.eulerChar() + S2.eulerChar() == A.eulerChar()
    return A


def census_lookup(regina_tri):
    """
    Should the input triangulation be in Regina's census, return the
    name of the manifold, dropping the triangulation number.
    """
    hits = regina.Census.lookup(regina_tri)
    hit = hits.first()
    if hit is not None:
        name = hit.name()
        match = re.search(r'(.*) : #\d+$', name)
        if match:
            return match.group(1)
        else:
            return match

def standard_lookup(regina_tri):
    match = regina.StandardTriangulation.isStandardTriangulation(regina_tri)
    if match:
        return match.manifold()

def closed_isosigs(snappy_manifold, trys=20, max_tets=50):
    """
    Generate a slew of 1-vertex triangulations of a closed manifold
    using SnapPy.
    
    >>> M = snappy.Manifold('m004(1,2)')
    >>> len(closed_isosigs(M, trys=5)) > 0
    True
    """
    M = snappy.Manifold(snappy_manifold)
    assert M.cusp_info('complete?') == [False]
    surgery_descriptions = [M.copy()]

    try:
        for curve in M.dual_curves():
            N = M.drill(curve)
            N.dehn_fill((1,0), 1)
            surgery_descriptions.append(N.filled_triangulation([0]))
    except snappy.SnapPeaFatalError:
        pass

    if len(surgery_descriptions) == 1:
        # Try again, but unfill the cusp first to try to find more
        # dual curves.
        try:
            filling = M.cusp_info(0).filling
            N = M.copy()
            N.dehn_fill((0, 0), 0)
            N.randomize()
            for curve in N.dual_curves():
                D = N.drill(curve)
                D.dehn_fill([filling, (1,0)])
                surgery_descriptions.append(D.filled_triangulation([0]))
        except snappy.SnapPeaFatalError:
            pass

    ans = set()
    for N in surgery_descriptions:
        for i in range(trys):
            T = N.filled_triangulation()
            if T._num_fake_cusps() == 1:
                n = T.num_tetrahedra()
                if n <= max_tets:
                    ans.add((n, T.triangulation_isosig(decorated=False)))
            N.randomize()

    return [iso for n, iso in sorted(ans)]

def best_match(matches):
    """
    Prioritize the most concise description that Regina provides to
    try to avoid things like the Seifert fibered space of a node being
    a solid torus or having several nodes that can be condensed into a
    single Seifert fibered piece.
    """
    
    def score(m):
        if isinstance(m, regina.SFSpace):
            s = 0
        elif isinstance(m, regina.GraphLoop):
            s = 1
        elif isinstance(m, regina.GraphPair):
            s = 2
        elif isinstance(m, regina.GraphTriple):
            s = 3
        elif m is None:
            s = 10000
        else:
            s = 4
        return (s, str(m))
    return min(matches, key=score)

def identify_with_torus_boundary(regina_tri):
    """
    Use the combined power of Regina and SnapPy to try to give a name
    to the input manifold.
    """
    
    kind, name = None, None
    
    P = regina_tri.clone()
    P.finiteToIdeal()
    P.intelligentSimplify()
    M = snappy.Manifold(P.isoSig())
    M.simplify()
    if appears_hyperbolic(M):
        for i in range(100):
            if M.solution_type() == 'all tetrahedra positively oriented':
                break
            M.randomize()
        
        if not M.verify_hyperbolicity(bits_prec=100):
            raise RuntimeError('Cannot prove hyperbolicity for ' +
                               M.triangulation_isosig())
        kind = 'hyperbolic'
        ids = M.identify()
        if ids:
            name = ids[0].name()
    else:
        match = standard_lookup(regina_tri)
        if match is None:
            Q = P.clone()
            Q.idealToFinite()
            Q.intelligentSimplify()
            match = standard_lookup(Q)
        if match is not None:
            kind = match.__class__.__name__
            name = str(match)
        else:
            name = P.isoSig()
    return kind, name
            
    
    

def is_toroidal(regina_tri):
    """
    Checks for essential tori and returns the pieces of the
    associated partial JSJ decomposition.
    
    >>> T = to_regina('hLALAkbccfefgglpkusufk')  # m004(4,1)
    >>> is_toroidal(T)[0]
    True
    >>> T = to_regina('hvLAQkcdfegfggjwajpmpw')  # m004(0,1)
    >>> is_toroidal(T)[0]
    True
    >>> T = to_regina('nLLLLMLPQkcdgfihjlmmlkmlhshnrvaqtpsfnf')  # 5_2(10,1)
    >>> T.isHaken()
    True
    >>> is_toroidal(T)[0]
    False

    Note: currently checks all fundamental normal tori; possibly
    the theory lets one just check *vertex* normal tori.
    """
    T = regina_tri
    assert T.isZeroEfficient()
    surfaces = regina.NNormalSurfaceList.enumerate(T,
                          regina.NS_QUAD, regina.NS_FUNDAMENTAL)
    for i in range(surfaces.size()):
        S = surfaces.surface(i)
        if S.eulerChar() == 0:
            if not S.isOrientable():
                S = S.doubleSurface()
            assert S.isOrientable()
            X = S.cutAlong()
            X.intelligentSimplify()
            X.splitIntoComponents()
            pieces = list(children(X))
            if all(not C.hasCompressingDisc() for C in pieces):
                ids = [identify_with_torus_boundary(C) for C in pieces]
                return (True, sorted(ids))
                
    return (False, None)


def decompose_along_tori(regina_tri):
    """
    First, finds all essential normal tori in the manifold associated
    with fundamental normal surfaces.  Then takes a maximal disjoint
    collection of these tori, namely the one with the fewest tori
    involved, and cuts the manifold open along it.  It tries to
    identify the pieces, removing any (torus x I) components. 

    Returns: (has essential torus, list of pieces)

    Note: This may fail to be the true JSJ decomposition because there
    could be (torus x I)'s in the list of pieces and it might well be
    possible to amalgamate some of the pieces into a single SFS.
    """
    
    T = regina_tri
    assert T.isZeroEfficient()
    essential_tori = []
    surfaces = regina.NNormalSurfaceList.enumerate(T,
                          regina.NS_QUAD, regina.NS_FUNDAMENTAL)
    for i in range(surfaces.size()):
        S = surfaces.surface(i)
        if S.eulerChar() == 0:
            if not S.isOrientable():
                S = S.doubleSurface()
            assert S.isOrientable()
            X = S.cutAlong()
            X.intelligentSimplify()
            X.splitIntoComponents()
            pieces = list(children(X))
            if all(not C.hasCompressingDisc() for C in pieces):
                essential_tori.append(S)

    if len(essential_tori) == 0:
        return False, None
    
    D = nx.Graph()
    for a, A in enumerate(essential_tori):
        for b, B in enumerate(essential_tori):
            if a < b:
                if A.disjoint(B):
                    D.add_edge(a, b)

    cliques = list(nx.find_cliques(D))
    if len(cliques) == 0:
        clique = [0]
    else:
        clique = min(cliques, key=len)
    clique = [essential_tori[c] for c in clique]
    A = clique[0]
    for B in clique[1:]:
        A = haken_sum(A, B)

    X = A.cutAlong()
    X.intelligentSimplify()
    X.splitIntoComponents()
    ids = [identify_with_torus_boundary(C) for C in list(children(X))]
    # Remove products
    ids = [i for i in ids if i[1] not in ('SFS [A: (1,1)]', 'A x S1')]
    return (True, sorted(ids))

def regina_name(closed_snappy_manifold, trys=100):
    """
    >>> regina_name('m004(1,0)')
    'S3'
    >>> regina_name('s006(-2, 1)')
    'SFS [A: (5,1)] / [ 0,-1 | -1,0 ]'
    >>> regina_name('m010(-1, 1)')
    'L(3,1) # RP3'
    >>> regina_name('m022(-1,1)')
    'SFS [S2: (3,2) (3,2) (4,-3)]'
    >>> regina_name('v0004(0, 1)')
    'SFS [S2: (2,1) (4,1) (15,-13)]'
    >>> regina_name('m305(1, 0)')
    'L(3,1) # RP3'
    """
    M = snappy.Manifold(closed_snappy_manifold)
    isosigs = closed_isosigs(M, trys=trys, max_tets=25)
    if len(isosigs) == 0:
        return
    T = to_regina(isosigs[0])
    if T.isIrreducible():
        if T.countTetrahedra() <= 11:
            for i in range(3):
                T.simplifyExhaustive(i)
                name = census_lookup(T)
                if name is not None:
                    return name
            
        matches = [standard_lookup(to_regina(iso)) for iso in isosigs]
        match = best_match(matches)
        if match is not None:
            return str(match)
    else:
        T.connectedSumDecomposition()
        pieces = [regina_name(P) for P in children(T)]
        if None not in pieces:
            return ' # '.join(sorted(pieces))

if __name__ == '__main__':
    import doctest
    print(doctest.testmod())
[Output omitted: running this cell inside SageMath prints a series of unrelated DeprecationWarnings, and the final doctest.testmod() call aborts with ModuleNotFoundError: No module named 'sagenb'. The function definitions above are unaffected and are the only parts used in the next cell.]
For each symmetric filling we now compute the double branched covers of the claimed branching sets (after filling their surgery components) and check that one of them is homeomorphic to the corresponding filling of the census knot: first with SnapPy's isometry checker, retrying on positive triangulations if the result is inconclusive, and, if it remains unclear, by comparing the names Regina assigns to the two filled manifolds.
In [4]:
for x in branching_sets_of_symmetric_fillings:
    print(x[0])
    K=snappy.Manifold(x[0])
    for y in x[1]:
        K.dehn_fill(y[0])
        if y[1]==[]:
            print(y[0],'NO BRANCHING SET')
        for branching_set in y[1]:
            B=snappy.Manifold(branching_set)
            F=B.filled_triangulation()
            DBC=double_branched_cover(F)
            w=False
            for D in DBC:
                w=better_is_isometric_to(D,K,index=1000)
                if w=='unclear':
                    X=better_find_positive_triangulation(K)
                    Y=better_find_positive_triangulation(D)
                    if X is not None and Y is not None:
                        w=better_is_isometric_to(X,Y,100)
                if w=='unclear':
                    DF=fill_triangulation(D)
                    K_regina=regina_name(K)
                    if K_regina is not None and regina_name(DF)==K_regina:
                        w=True
                if w==True:
                    print(y[0],B,w)
                    break
            if w!=True:
                print(y[0],B,'UNCLEAR')
                        
t12533
(0, 1) K12n407(0,0) True
(1, 1) L13n7360(0,0)(2,1)(0,0) True
(-1, 1) L12n789(0,0)(0,0) True
(-2, 1) L12n722(3,1)(0,0) True
(-1, 2) L11n192(-3,1)(0,0) True
(-3, 1) L11n350(0,0)(-4,1)(0,0) True
t12681
(0, 1) K11n89(0,0) True
(1, 1) L11n419(0,0)(5,1)(0,0) True
(1, 1) L12n1907(0,0)(3,1)(0,0) True
(1, 1) L13n7356(0,0)(0,0)(2,1) True
(-1, 1) L11n172(0,0)(0,0) True
(-1, 2) L10a81(-3,1)(0,0) True
(-1, 3) L11a282(-4,1)(0,0) True
(-2, 3) L13n4363(-5,3)(0,0) True
o9_38928
(0, 1) L11n178(0,0)(0,0) True
(1, 1) K11n172(0,0) True
(-1, 1) L12n703(-3,1)(0,0) True
(-2, 1) L12n1949(0,0)(2,1)(0,0) True
(2, 1) L13n7433(0,0)(-3,1)(0,0) True
(2, 1) L10a105(0,0)(-2,1) True
(2, 1) L10n54(-8,1)(0,0) True
o9_39162
(0, 1) K12n278(0,0) True
(1, 1) L12n1050(0,0)(0,0) True
(-1, 1) L13n9864(-3,1)(0,0)(0,0) True
(2, 1) L10n44(7,1)(0,0) True
(1, 2) L12n784(3,1)(0,0) True
(1, 4) L13n4343(-7,4)(0,0) True
o9_40363
(0, 1) K12n479(0,0) True
(1, 1) L14n43377(0,0)(2,1)(0,0) True
(1, 1) L13n9451(0,0)(3,1)(0,0) True
(1, 1) L11n419(0,0)(7,1)(0,0) True
(-1, 1) L12n785(0,0)(0,0) True
(-1, 2) L11a225(-3,1)(0,0) True
(-1, 3) L12a1861(0,0)(4,1)(0,0) True
(-3, 4) L13n4363(-7,4)(0,0) True
o9_40487
(0, 1) L11n152(0,0)(0,0) True
(1, 1) K11n147(0,0) True
(-1, 1) L12n722(-3,1)(0,0) True
(1, 2) L12n968(-3,1)(0,0) True
(2, 1) L11n333(0,0)(2,1)(0,0) True
(3, 1) L12n1314(7,1)(0,0) True
(-2, 1) L12n1041(-2,1)(0,0) True
o9_40504
(0, 1) L11n179(0,0)(0,0) True
(1, 1) K11n166(0,0) True
(-1, 1) L12n715(-3,1)(0,0) True
(-2, 1) L12n1037(-2,1)(0,0) True
(2, 1) L13n7421(0,0)(-3,1)(0,0) True
(2, 1) L10a146(0,0)(-2,1)(0,0) True
(2, 1) L13n8521(0,0)(5,1)(0,0) True
o9_40582
(0, 1) L13n4413(0,0)(0,0) True
(1, 1) K13n2958(0,0) True
(-1, 1) L10n44(-5,3)(0,0) True
(1, 2) L12n996(3,1)(0,0) True
(2, 1) L13n9833(3,1)(0,0)(0,0) True
(4, 1) L11n350(0,0)(-5,1)(0,0) True
o9_42675
(0, 1) L12n702(0,0)(0,0) True
(1, 1) L14n24428(5,3)(0,0) True
(-1, 1) K12n730(0,0) True
(2, 1) L13n7552(0,0)(-2,1)(0,0) True
(-2, 1) L11n223(-2,1)(0,0) True
(-2, 1) L10n24(0,0)(6,1) True
(-2, 1) L13n9832(0,0)(8,3)(0,0) True

This confirms that the double branched covers along the claimed branching sets are homeomorphic to the corresponding symmetric fillings.